@InProceedings{MedinaRodriguezHash:2011:CoDiOp,
               author = "Medina Rodriguez, Rosario A. and Hashimoto, Ronaldo Fumio",
          affiliation = "{University of Sao Paulo} and {University of Sao Paulo}",
                title = "Combining Dialectical Optimization and Gradient Descent Methods 
                         for Improving the Accuracy of Straight Line Segment Classifiers",
            booktitle = "Proceedings...",
                 year = "2011",
               editor = "Lewiner, Thomas and Torres, Ricardo",
         organization = "Conference on Graphics, Patterns and Images, 24. (SIBGRAPI)",
            publisher = "IEEE Computer Society",
              address = "Los Alamitos",
             keywords = "straight line segments, gradient descent technique, dialectical 
                         optimization, genetic algorithms, pattern recognition.",
             abstract = "A recent published pattern recognition technique called Straight 
                         Line Segment (SLS) uses two sets of straight line segments to 
                         classify a set of points from two different classes and it is 
                         based on distances between these points and each set of straight 
                         line segments. It has been demonstrated that, using this 
                         technique, it is possible to generate classifiers which can reach 
                         high accuracy rates for supervised pattern classification. 
                         However, a critical issue in this technique is to find the optimal 
                         positions of the straight line segments given a training data set. 
                         This paper proposes a combining method of the dialectical 
                         optimization method (DOM) and the gradient descent technique for 
                         solving this optimization problem. The main advantage of DOM, such 
                         as any evolutionary algorithm, is the capability of escaping from 
                         local optimum by multi-point stochastic searching. On the other 
                         hand, the strength of gradient descent method is the ability of 
                         finding local optimum by pointing the direction that maximizes the 
                         objective function. Our hybrid method combines the main 
                         characteristics of these two methods. We have applied our 
                         combining approach to several data sets obtained from artificial 
                         distributions and UCI databases. These experiments show that the 
                         proposed algorithm in most cases has higher classification rates 
                         with respect to single gradient descent method and the combination 
                         of gradient descent with genetic algorithms.",
  conference-location = "Macei{\'o}, AL, Brazil",
      conference-year = "28-31 Aug. 2011",
                  doi = "10.1109/SIBGRAPI.2011.8",
                  url = "http://dx.doi.org/10.1109/SIBGRAPI.2011.8",
             language = "en",
                  ibi = "8JMKD3MGPBW34M/3A3H72S",
                  url = "http://urlib.net/ibi/8JMKD3MGPBW34M/3A3H72S",
           targetfile = "Combining Dialectical Optimization and Gradient Descent Methods 
                         for Improving the Accuracy of Straight Line Segment 
                         Classifiers.pdf",
        urlaccessdate = "2024, Apr. 30"
}
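
The abstract describes a hybrid optimization pattern: a population-based stochastic search (DOM) that can escape local optima, combined with gradient descent for local refinement of each candidate. The Python sketch below illustrates only this generic hybrid pattern on a toy objective; the function names, the toy loss, and all parameters are assumptions made for illustration, not the authors' DOM formulation or the SLS training criterion, which are defined in the paper itself.

import numpy as np

# Toy objective to minimize; a stand-in for the SLS training loss (assumption).
def loss(x):
    return float(np.sum(x**2) + 2.0 * np.sum(np.sin(3.0 * x)**2))

def numerical_grad(f, x, eps=1e-6):
    # Central-difference gradient, so the sketch needs no analytic derivative.
    g = np.zeros_like(x)
    for i in range(x.size):
        step = np.zeros_like(x)
        step[i] = eps
        g[i] = (f(x + step) - f(x - step)) / (2.0 * eps)
    return g

def refine(x, f, lr=0.05, steps=50):
    # Local refinement by plain gradient descent.
    for _ in range(steps):
        x = x - lr * numerical_grad(f, x)
    return x

def hybrid_search(f, dim=4, pop_size=20, generations=30, sigma=0.5, seed=0):
    rng = np.random.default_rng(seed)
    pop = rng.uniform(-3.0, 3.0, size=(pop_size, dim))
    for _ in range(generations):
        # Multi-point stochastic move (global exploration), then local refinement.
        candidates = pop + rng.normal(0.0, sigma, size=pop.shape)
        candidates = np.array([refine(c, f) for c in candidates])
        # Elitist selection: keep the better of parent and refined candidate.
        keep = np.array([f(c) < f(p) for c, p in zip(candidates, pop)])
        pop[keep] = candidates[keep]
    best = min(pop, key=f)
    return best, f(best)

if __name__ == "__main__":
    best, value = hybrid_search(loss)
    print("best point:", best, "loss:", value)

In the sketch, each candidate is perturbed stochastically and then refined by a few gradient descent steps; the refined candidate replaces its parent only when it improves the objective. This mirrors the division of labor the abstract attributes to the two components: the stochastic, multi-point search escapes local optima, while gradient descent performs the local optimization.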

